# Cubist ----

## Cubist package ----

# Cubist rule-based regression ensemble with 100 committee members.
# Cubist uses an x/y interface rather than a formula, so the response
# column (cmedv) is dropped from the predictor set.
boston.cubist <- Cubist::cubist(
  x = subset(boston, select = -cmedv),
  y = boston$cmedv,
  committees = 100
)
bostonHousingTest(boston.cubist)

# Decision trees ----

## rpart package ----

# Single CART-style regression tree on all predictors.
boston.rpart <- rpart::rpart(formula = cmedv ~ ., data = boston)
bostonHousingTest(boston.rpart)

## party package ----

# Conditional inference tree (party implementation).
boston.ctree <- party::ctree(formula = cmedv ~ ., data = boston)
bostonHousingTest(boston.ctree)

## partykit package ----

# Conditional inference tree again, via the newer partykit implementation.
boston.ctree2 <- partykit::ctree(formula = cmedv ~ ., data = boston)
bostonHousingTest(boston.ctree2)

# Bagging ----

## ipred package ----

# Bagged regression trees: 500 bootstrap replicates. The seed is fixed so
# the bootstrap resampling is reproducible.
set.seed(101)
boston.ipred <- ipred::bagging(
  cmedv ~ .,
  data = boston,
  nbagg = 500
)
bostonHousingTest(boston.ipred)

# Random forests ----

## randomForest package ----

# Classic random forest with default settings. Seeded for reproducibility
# of the bootstrap samples and per-split variable selection.
set.seed(101)
boston.rf <- randomForest::randomForest(
  cmedv ~ .,
  data = boston
)
bostonHousingTest(boston.rf)

## party package ----

# Forest of conditional inference trees (party::cforest), seeded for
# reproducibility. The test harness is also asked for quantiles here.
set.seed(101)
boston.crf <- party::cforest(
  cmedv ~ .,
  data = boston
)
bostonHousingTest(boston.crf, quantiles = TRUE)

## ranger package ----

# Fast random forest implementation from the ranger package, default
# settings, seeded for reproducibility.
set.seed(101)
boston.ranger <- ranger::ranger(
  cmedv ~ .,
  data = boston
)
bostonHousingTest(boston.ranger)

# Boosting ----

## gbm package ----

# Gradient boosting machine: up to 5000 depth-3 trees with a small learning
# rate, and 5-fold cross-validation for choosing the number of iterations.
# Seeded because subsampling and the CV fold assignment are stochastic.
set.seed(101)
boston.gbm <- gbm::gbm(cmedv ~ ., data = boston, distribution = "gaussian",
                       n.trees = 5000, interaction.depth = 3, shrinkage = 0.001,
                       cv.folds = 5)
# Select the optimal iteration count from the cross-validation error.
# method = "OOB" was used previously, but gbm itself warns that OOB
# underestimates the optimal number of iterations and recommends CV when
# cv.folds > 0 -- which was requested above, so the CV estimate is available.
best.iter <- gbm::gbm.perf(boston.gbm, method = "cv", plot.it = FALSE)
bostonHousingTest(boston.gbm, n.trees = best.iter)
## Warning in partial.default(object, pred.var = "age", ice = TRUE, train =
## trn, : Recursive method not available for "gbm" objects when `ice = TRUE`.
## Using brute force method instead.
## Warning in partial.default(object, pred.var = "age", ice = TRUE, center =
## TRUE, : Recursive method not available for "gbm" objects when `ice = TRUE`.
## Using brute force method instead.

## xgboost package ----

# Gradient boosting via xgboost. Predictors must be a numeric matrix, hence
# data.matrix() on the predictor columns. Seeded because subsample and
# colsample_bytree make fitting stochastic.
set.seed(101)
boston.xgb <- xgboost::xgboost(
  data = data.matrix(subset(boston, select = -cmedv)), label = boston$cmedv,
  # "reg:linear" is deprecated since xgboost 1.0; "reg:squarederror" is the
  # current name for the same squared-loss regression objective.
  objective = "reg:squarederror", nrounds = 5000, max_depth = 3, eta = 0.001,
  colsample_bytree = 0.8, subsample = 0.5,
  save_period = NULL, verbose = 0
)
bostonHousingTest(boston.xgb)

# Neural networks ----

## nnet package ----

# Single-hidden-layer network: 6 hidden units, weight decay 0.1, and
# linout = TRUE for a linear output unit (regression rather than logistic
# output). Seeded because the initial weights are random.
set.seed(101)
boston.nnet <- nnet::nnet(
  cmedv ~ .,
  data = boston,
  size = 6,
  decay = 0.1,
  linout = TRUE,
  trace = FALSE
)
bostonHousingTest(boston.nnet)

# Support vector machines ----

## e1071 package ----

# Epsilon-regression SVM (libsvm via e1071); the type is spelled out
# explicitly rather than relying on the numeric-response default.
boston.svm <- e1071::svm(
  cmedv ~ .,
  data = boston,
  type = "eps-regression"
)
bostonHousingTest(boston.svm)

## kernlab package ----

# Epsilon-regression SVM via kernlab's ksvm.
boston.ksvm <- kernlab::ksvm(
  cmedv ~ .,
  data = boston,
  type = "eps-svr"
)
bostonHousingTest(boston.ksvm)

# Linear/generalized linear models ----

## stats package ----

# Ordinary least squares with all main effects and two-way interactions
# (the .^2 formula expansion).
boston.lm <- lm(cmedv ~ .^2, data = boston)
bostonHousingTest(boston.lm)

# Same model fit through glm(); the default gaussian family makes this
# equivalent to the lm() fit above.
boston.glm <- glm(cmedv ~ .^2, data = boston)
bostonHousingTest(boston.glm)

# Multivariate adaptive regression splines ----

## earth package ----

# MARS fit allowing up to third-degree interaction terms.
boston.earth <- earth::earth(cmedv ~ ., data = boston, degree = 3)
bostonHousingTest(boston.earth)

## mda package ----

# Original MARS implementation from mda; uses an x/y interface, so the
# response column is dropped from the predictor set. Up to third-degree
# interactions, matching the earth fit above.
boston.mars <- mda::mars(
  x = subset(boston, select = -cmedv),
  y = boston$cmedv,
  degree = 3
)
bostonHousingTest(boston.mars)